library(tidyverse) # metapackage of all tidyverse packages
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
── Attaching packages ────────────────────────────────── tidyverse 1.3.0 ──
✓ ggplot2 3.3.3     ✓ purrr   0.3.4
✓ tibble  3.0.4     ✓ dplyr   1.0.2
✓ tidyr   1.1.2     ✓ stringr 1.4.0
✓ readr   1.4.0     ✓ forcats 0.5.0
── Conflicts ───────────────────────────────────── tidyverse_conflicts() ──
x dplyr::filter() masks stats::filter()
x dplyr::lag()    masks stats::lag()
library(keras)
use_condaenv("r-reticulate")

5.4.1

Load our model

model <- load_model_hdf5("cats_and_dogs_small_2.h5")
2021-04-11 21:52:31.172247: I tensorflow/core/platform/cpu_feature_guard.cc:143] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA
2021-04-11 21:52:31.191172: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x7fd92eccf8d0 initialized for platform Host (this does not guarantee that XLA will be used). Devices:
2021-04-11 21:52:31.191189: I tensorflow/compiler/xla/service/service.cc:176]   StreamExecutor device (0): Host, Default Version
summary(model)
Model: "sequential_2"
___________________________________________________________________________
Layer (type)                     Output Shape                  Param #     
===========================================================================
conv2d_10 (Conv2D)               (None, 148, 148, 32)          896         
___________________________________________________________________________
max_pooling2d_9 (MaxPooling2D)   (None, 74, 74, 32)            0           
___________________________________________________________________________
conv2d_9 (Conv2D)                (None, 72, 72, 64)            18496       
___________________________________________________________________________
max_pooling2d_8 (MaxPooling2D)   (None, 36, 36, 64)            0           
___________________________________________________________________________
conv2d_8 (Conv2D)                (None, 34, 34, 128)           73856       
___________________________________________________________________________
max_pooling2d_7 (MaxPooling2D)   (None, 17, 17, 128)           0           
___________________________________________________________________________
conv2d_7 (Conv2D)                (None, 15, 15, 128)           147584      
___________________________________________________________________________
max_pooling2d_6 (MaxPooling2D)   (None, 7, 7, 128)             0           
___________________________________________________________________________
flatten_2 (Flatten)              (None, 6272)                  0           
___________________________________________________________________________
dropout (Dropout)                (None, 6272)                  0           
___________________________________________________________________________
dense_5 (Dense)                  (None, 512)                   3211776     
___________________________________________________________________________
dense_4 (Dense)                  (None, 1)                     513         
===========================================================================
Total params: 3,453,121
Trainable params: 3,453,121
Non-trainable params: 0
___________________________________________________________________________

Get input image:

img_path <- "cats_and_dogs_small/test/cats/cat.1700.jpg"
img <- image_load(img_path, target_size = c(150, 150))                 
img_tensor <- image_to_array(img)
img_tensor <- array_reshape(img_tensor, c(1, 150, 150, 3))
img_tensor <- img_tensor / 255                                         
dim(img_tensor)                                                        
[1]   1 150 150   3

take a look at the image

plot(as.raster(img_tensor[1,,,]))

now create the model. Using keras_model() instead of keras_model_sequential() allows us to access multiple output layers

layer_outputs <- lapply(model$layers[1:8], function(layer) layer$output)      
activation_model <- keras_model(inputs = model$input, outputs = layer_outputs)
layer_outputs[[1]] %>% tensorflow::as.array()
Error: 'as.array' is not an exported object from 'namespace:tensorflow'
(These layer outputs are symbolic tensors with no values yet, so they can't be coerced to arrays; str(layer_outputs) is the way to inspect them.)
activations <- activation_model %>% predict(img_tensor)         
str(activations)
List of 8
 $ : num [1, 1:148, 1:148, 1:32] 0.00837 0.01414 0.00918 0.01291 0.00825 ...
 $ : num [1, 1:74, 1:74, 1:32] 0.0141 0.0133 0.0178 0.021 0.0205 ...
 $ : num [1, 1:72, 1:72, 1:64] 0 0 0 0 0 0 0 0 0 0 ...
 $ : num [1, 1:36, 1:36, 1:64] 0 0 0 0 0 0 0 0 0 0 ...
 $ : num [1, 1:34, 1:34, 1:128] 0.00386 0 0 0 0 ...
 $ : num [1, 1:17, 1:17, 1:128] 0.00386 0 0 0.0131 0.00835 ...
 $ : num [1, 1:15, 1:15, 1:128] 0.01514 0 0.01075 0.00422 0.01076 ...
 $ : num [1, 1:7, 1:7, 1:128] 0.02181 0.01075 0.01471 0.00678 0.02665 ...

define plotting function

plot_channel <- function(channel) {
  rotate <- function(x) t(apply(x, 2, rev))
  image(rotate(channel), axes = FALSE, asp = 1,
        col = topo.colors(12))
}

plot them all

image_size <- 58
images_per_row <- 16

for (i in 1:8) {

  layer_activation <- activations[[i]]
  layer_name <- model$layers[[i]]$name

  n_features <- dim(layer_activation)[[4]]
  n_cols <- n_features %/% images_per_row

  #png(paste0("cat_activations_", i, "_", layer_name, ".png"),
   #   width = image_size * images_per_row,
  #    height = image_size * n_cols)
  op <- par(mfrow = c(n_cols, images_per_row), mai = rep_len(0.02, 4))

  for (col in 0:(n_cols-1)) {
    for (row in 0:(images_per_row-1)) {
      channel_image <- layer_activation[1,,,(col*images_per_row) + row + 1]
      plot_channel(channel_image)
    }
  }

  par(op)
  #dev.off()
}

5.4.2

visualizing the filters

set up the loss function

  weights = "imagenet",
Error: unexpected ',' in "  weights = "imagenet","

get the gradient associated with the above loss and normalize it by its RMS (so the step size doesn't depend on the raw gradient magnitude)

grads <- k_gradients(loss, model$input)[[1]]  
grads <- grads / (k_sqrt(k_mean(k_square(grads))) + 1e-5) 

now we need to be able to calculate the loss and gradient for a given input. We build an iterate function with k_function() for this:

iterate <- k_function(list(model$input), list(loss, grads))
c(loss_value, grads_value) %<-%
    iterate(list(array(0, dim = c(1, 150, 150, 3))))

put it together into a loop

input_img_data <-                                                     
  array(runif(150 * 150 * 3), dim = c(1, 150, 150, 3)) * 20 + 128    # input random image, near grey 
step <- 1
for (i in 1:40) {                                                     
  c(loss_value, grads_value) %<-% iterate(list(input_img_data)) # calculate gradient and loss
  cat("loss: ", loss_value, "\n")
  cat("grads_value: ", grads_value[1,1:5,1,1], "\n")
  input_img_data <- input_img_data + (grads_value * step) # update image     
}
loss:  68.10915 
grads_value:  -0.02055234 0.0124106 0.06712954 0.07983842 0.09905355 
loss:  148.9816 
grads_value:  -0.02367963 0.01027688 0.06084429 0.05776044 0.05121794 
loss:  248.6013 
grads_value:  -0.02376306 -0.001260084 0.03469283 0.04041674 0.05864409 
loss:  368.2984 
grads_value:  -0.02290497 0.001561245 0.039823 0.04660433 0.07228445 
loss:  499.641 
grads_value:  -0.01960171 0.009522016 0.04265621 0.0478622 0.09018818 
loss:  635.6437 
grads_value:  -0.02612916 -0.01237173 0.0002258365 0.0184661 0.1257058 
loss:  773.5628 
grads_value:  0.01547417 0.08030173 0.03711985 0.03134648 0.1461218 
loss:  910.878 
grads_value:  0.008887916 0.07132961 0.03514753 0.0262162 0.1337125 
loss:  1047.179 
grads_value:  0.007650015 0.06760189 0.04024948 0.05167056 0.1758872 
loss:  1182.105 
grads_value:  0.009264603 0.07076927 0.02188255 0.03188983 0.1244984 
loss:  1315.951 
grads_value:  0.01880095 0.08757914 0.02381711 0.01899291 0.1269091 
loss:  1448.217 
grads_value:  0.01859585 0.1026091 0.04526343 0.0266121 0.1345554 
loss:  1579.036 
grads_value:  0.04461221 0.2355232 0.2012401 0.1943541 0.376265 
loss:  1708.379 
grads_value:  0.06870421 0.2384596 0.1931398 0.1444489 0.2920649 
loss:  1836.64 
grads_value:  0.08103324 0.3586213 0.2994048 0.180763 0.3360555 
loss:  1963.63 
grads_value:  0.09606591 0.3835185 0.3049706 0.1680665 0.4143465 
loss:  2089.055 
grads_value:  0.04516215 0.3836091 0.3454892 0.1179104 0.3556658 
loss:  2213.533 
grads_value:  0.05075496 0.4168369 0.3764934 0.1279662 0.3292925 
loss:  2336.879 
grads_value:  0.04339589 0.4811139 0.410225 0.1536793 0.5119777 
loss:  2459.118 
grads_value:  0.03037276 0.5005596 0.3975278 0.05920871 0.3628269 
loss:  2580.301 
grads_value:  0.04027561 0.5302778 0.3662272 0.06176559 0.372285 
loss:  2700.705 
grads_value:  0.08396044 0.5741707 0.3705891 0.07760627 0.4446297 
loss:  2820.42 
grads_value:  0.09024354 0.6080343 0.4015453 0.1119668 0.5001155 
loss:  2939.647 
grads_value:  0.09923641 0.6195131 0.4084836 0.0895992 0.4790891 
loss:  3058.214 
grads_value:  0.1463298 0.6245549 0.3951112 0.1092209 0.5340443 
loss:  3176.266 
grads_value:  0.1509034 0.633091 0.3740603 0.05978949 0.4644986 
loss:  3293.812 
grads_value:  0.1764331 0.6503105 0.3870803 -0.0221213 0.3533002 
loss:  3410.654 
grads_value:  0.1931657 0.6750054 0.408454 0.05900301 0.4735953 
loss:  3526.646 
grads_value:  0.1990764 0.6735084 0.408569 0.03929114 0.4508763 
loss:  3641.977 
grads_value:  0.1652934 0.6494454 0.3820862 -0.01116045 0.3897472 
loss:  3756.678 
grads_value:  0.1833004 0.6597424 0.4220552 0.05023104 0.4164359 
loss:  3870.92 
grads_value:  0.1640288 0.6446196 0.3983006 -0.008581717 0.366553 
loss:  3984.546 
grads_value:  0.1870599 0.6434796 0.4097235 0.05200551 0.4438059 
loss:  4097.761 
grads_value:  0.1900522 0.6500507 0.4007178 0.02511647 0.39864 
loss:  4210.431 
grads_value:  0.2019599 0.6114587 0.3322644 0.07159135 0.4443951 
loss:  4322.678 
grads_value:  0.1743665 0.5909153 0.3214991 0.01573551 0.4171126 
loss:  4434.57 
grads_value:  0.1418489 0.5958902 0.2778648 -0.04889731 0.4553466 
loss:  4545.955 
grads_value:  0.08841515 0.5324376 0.1339925 -0.1929437 0.4394998 
loss:  4656.972 
grads_value:  0.07162858 0.5048009 0.1342231 -0.2020071 0.4424397 
loss:  4767.597 
grads_value:  0.08485744 0.488587 0.05986075 -0.2134876 0.4643567 

This is gradient ascent: we add the gradient to the input, which increases the loss (here, the mean activation of the filter).

post-process the tensor so that we can display it as an image:

deprocess_image <- function(x) {
  dms <- dim(x)
  x <- x - mean(x)                        
  x <- x / (sd(x) + 1e-5)                 
  x <- x * 0.1                            
  x <- x + 0.5                            
  x <- pmax(0, pmin(x, 1))                
  array(x, dim = dms)                     
}

put it all together in a function

generate_pattern <- function(layer_name, filter_index, size = 150) {
  layer_output <- model$get_layer(layer_name)$output                      
  loss <- k_mean(layer_output[,,,filter_index])                           
  grads <- k_gradients(loss, model$input)[[1]]                            
  grads <- grads / (k_sqrt(k_mean(k_square(grads))) + 1e-5)               
  iterate <- k_function(list(model$input), list(loss, grads))             
  input_img_data <-                                                       
    array(runif(size * size * 3), dim = c(1, size, size, 3)) * 20 + 128   
  step <- 1                                                               
  for (i in 1:40) {                                                       
    c(loss_value, grads_value) %<-% iterate(list(input_img_data))         
    input_img_data <- input_img_data + (grads_value * step)               
  }                                                                       
  img <- input_img_data[1,,,]
  deprocess_image(img)
}
library(grid)
grid.raster(generate_pattern("block3_conv1", 1))
library(grid)
library(gridExtra)
dir.create("vgg_filters")
for (layer_name in c("block1_conv1", "block2_conv1",
                     "block3_conv1", "block4_conv1")) {
  size <- 140

  png(paste0("vgg_filters/", layer_name, ".png"),
      width = 8 * size, height = 8 * size)

  grobs <- list()
  for (i in 0:7) {
    for (j in 0:7) {
      pattern <- generate_pattern(layer_name, i + (j*8) + 1, size = size)
      grob <- rasterGrob(pattern,
                         width = unit(0.9, "npc"),
                         height = unit(0.9, "npc"))
      grobs[[length(grobs)+1]] <- grob
    }
  }

  grid.arrange(grobs = grobs, ncol = 8)
  dev.off()
}

5.4.3 heat maps

model <- application_vgg16(weights = "imagenet")            
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels.h5

553467904/553467096 [==============================] - 12s 0us/step
img_path <- "creative_commons_elephant.jpg"              
img <- image_load(img_path, target_size = c(224, 224)) %>%           
  image_to_array() %>%                                               
  array_reshape(dim = c(1, 224, 224, 3)) %>%                         
  imagenet_preprocess_input()                                        
preds <- model %>% predict(img)

imagenet_decode_predictions(preds, top = 3)[[1]]
Downloading data from https://storage.googleapis.com/download.tensorflow.org/data/imagenet_class_index.json

40960/35363 [==================================] - 0s 1us/step
which.max(preds[1,])
[1] 387
summary(model)
Model: "vgg16"
___________________________________________________________________________________
Layer (type)                         Output Shape                     Param #      
===================================================================================
input_4 (InputLayer)                 [(None, 224, 224, 3)]            0            
___________________________________________________________________________________
block1_conv1 (Conv2D)                (None, 224, 224, 64)             1792         
___________________________________________________________________________________
block1_conv2 (Conv2D)                (None, 224, 224, 64)             36928        
___________________________________________________________________________________
block1_pool (MaxPooling2D)           (None, 112, 112, 64)             0            
___________________________________________________________________________________
block2_conv1 (Conv2D)                (None, 112, 112, 128)            73856        
___________________________________________________________________________________
block2_conv2 (Conv2D)                (None, 112, 112, 128)            147584       
___________________________________________________________________________________
block2_pool (MaxPooling2D)           (None, 56, 56, 128)              0            
___________________________________________________________________________________
block3_conv1 (Conv2D)                (None, 56, 56, 256)              295168       
___________________________________________________________________________________
block3_conv2 (Conv2D)                (None, 56, 56, 256)              590080       
___________________________________________________________________________________
block3_conv3 (Conv2D)                (None, 56, 56, 256)              590080       
___________________________________________________________________________________
block3_pool (MaxPooling2D)           (None, 28, 28, 256)              0            
___________________________________________________________________________________
block4_conv1 (Conv2D)                (None, 28, 28, 512)              1180160      
___________________________________________________________________________________
block4_conv2 (Conv2D)                (None, 28, 28, 512)              2359808      
___________________________________________________________________________________
block4_conv3 (Conv2D)                (None, 28, 28, 512)              2359808      
___________________________________________________________________________________
block4_pool (MaxPooling2D)           (None, 14, 14, 512)              0            
___________________________________________________________________________________
block5_conv1 (Conv2D)                (None, 14, 14, 512)              2359808      
___________________________________________________________________________________
block5_conv2 (Conv2D)                (None, 14, 14, 512)              2359808      
___________________________________________________________________________________
block5_conv3 (Conv2D)                (None, 14, 14, 512)              2359808      
___________________________________________________________________________________
block5_pool (MaxPooling2D)           (None, 7, 7, 512)                0            
___________________________________________________________________________________
flatten (Flatten)                    (None, 25088)                    0            
___________________________________________________________________________________
fc1 (Dense)                          (None, 4096)                     102764544    
___________________________________________________________________________________
fc2 (Dense)                          (None, 4096)                     16781312     
___________________________________________________________________________________
predictions (Dense)                  (None, 1000)                     4097000      
===================================================================================
Total params: 138,357,544
Trainable params: 138,357,544
Non-trainable params: 0
___________________________________________________________________________________
african_elephant_output <- model$output[, 387]                             
last_conv_layer <- model %>% get_layer("block5_conv3")                     
grads <- k_gradients(african_elephant_output, last_conv_layer$output)[[1]] 
pooled_grads <- k_mean(grads, axis = c(1, 2, 3))                           
iterate <- k_function(list(model$input),                                   
                      list(pooled_grads, last_conv_layer$output[1,,,]))
c(pooled_grads_value, conv_layer_output_value) %<-% iterate(list(img))     
for (i in 1:512) {       # 512 channels                                             
  conv_layer_output_value[,,i] <-
    conv_layer_output_value[,,i] * pooled_grads_value[[i]]
}
heatmap <- apply(conv_layer_output_value, c(1,2), mean)                    
heatmap <- pmax(heatmap, 0)
heatmap <- heatmap / max(heatmap)                                          
write_heatmap <- function(heatmap, filename, width = 224, height = 224,
                          bg = "white", col = terrain.colors(12)) {
  png(filename, width = width, height = height, bg = bg)
  op = par(mar = c(0,0,0,0))
  on.exit({par(op); dev.off()}, add = TRUE)
  rotate <- function(x) t(apply(x, 2, rev))
  image(rotate(heatmap), axes = FALSE, asp = 1, col = col)
}
write_heatmap(heatmap, "elephant_heatmap.png")                             
library(magick)
Linking to ImageMagick 6.9.11.32
Enabled features: cairo, fontconfig, freetype, lcms, pango, rsvg, webp
Disabled features: fftw, ghostscript, x11
library(viridis)
Loading required package: viridisLite
image <- image_read(img_path)                                      
info <- image_info(image)
geometry <- sprintf("%dx%d!", info$width, info$height)
pal <- col2rgb(viridis(20), alpha = TRUE)                          
alpha <- floor(seq(0, 255, length = ncol(pal)))
pal_col <- rgb(t(pal), alpha = alpha, maxColorValue = 255)
write_heatmap(heatmap, "elephant_overlay.png",
              width = 14, height = 14, bg = NA, col = pal_col)
image_read("elephant_overlay.png") %>%                             
  image_resize(geometry, filter = "quadratic") %>%
  image_composite(image, operator = "blend", compose_args = "20") %>%
  plot()

---
title: "Chapter 5.4"
author: "Julin Maloof"
date: "4/10/2021"
output: html_notebook
---

```{r}
library(tidyverse) # metapackage of all tidyverse packages
library(keras)
use_condaenv("r-reticulate")
```


## 5.4.1

Load our model

```{r}
model <- load_model_hdf5("cats_and_dogs_small_2.h5")
summary(model)
```

Get input image:

```{r}
img_path <- "cats_and_dogs_small/test/cats/cat.1700.jpg"
img <- image_load(img_path, target_size = c(150, 150))                 
img_tensor <- image_to_array(img)
img_tensor <- array_reshape(img_tensor, c(1, 150, 150, 3))
img_tensor <- img_tensor / 255                                         
dim(img_tensor)                                                        
```

take a look at the image
```{r}
plot(as.raster(img_tensor[1,,,]))
```

now create the model. Using `keras_model` instead of `keras_model_sequential` allows us to access multiple output layers

```{r}
layer_outputs <- lapply(model$layers[1:8], function(layer) layer$output)      
activation_model <- keras_model(inputs = model$input, outputs = layer_outputs)
```


```{r}
str(layer_outputs) # at this point it is just a list of symbolic tensors (no values yet)
```

```{r}
activations <- activation_model %>% predict(img_tensor)         
```

```{r}
str(activations)
```

define plotting function
```{r}
plot_channel <- function(channel) {
  rotate <- function(x) t(apply(x, 2, rev))
  image(rotate(channel), axes = FALSE, asp = 1,
        col = topo.colors(12))
}
```

```{r}
first_layer_activation <- activations[[1]]
dim(first_layer_activation)
plot_channel(first_layer_activation[1,,,2])
plot_channel(first_layer_activation[1,,,7])
```
plot them all

```{r}
image_size <- 58
images_per_row <- 16

for (i in 1:8) {

  layer_activation <- activations[[i]]
  layer_name <- model$layers[[i]]$name

  n_features <- dim(layer_activation)[[4]]
  n_cols <- n_features %/% images_per_row

  #png(paste0("cat_activations_", i, "_", layer_name, ".png"),
   #   width = image_size * images_per_row,
  #    height = image_size * n_cols)
  op <- par(mfrow = c(n_cols, images_per_row), mai = rep_len(0.02, 4))

  for (col in 0:(n_cols-1)) {
    for (row in 0:(images_per_row-1)) {
      channel_image <- layer_activation[1,,,(col*images_per_row) + row + 1]
      plot_channel(channel_image)
    }
  }

  par(op)
  #dev.off()
}
```

## 5.4.2

visualizing the filters

set up the loss function
```{r}
library(keras)
library(tensorflow)
tf$compat$v1$disable_eager_execution()
model <- application_vgg16(
  weights = "imagenet",
  include_top = FALSE
)
layer_name <- "block3_conv1"
filter_index <- 1
layer_output <- get_layer(model, layer_name)$output
loss <- k_mean(layer_output[,,,filter_index]) # average output as a tensor
```
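
At this point `loss` is a symbolic tensor, not a number; it only gets a value when evaluated through `k_function()` below. Printing it (a quick check, assuming the TF1-compat graph mode set up above) just shows the graph node:

```{r}
loss  # a symbolic tensor; no value until evaluated via k_function()
```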


get the gradient associated with the above loss and normalize it by its RMS (so the step size doesn't depend on the raw gradient magnitude)
```{r}
grads <- k_gradients(loss, model$input)[[1]]  
grads <- grads / (k_sqrt(k_mean(k_square(grads))) + 1e-5) # as a tensor
```

now we need to be able to calculate the loss and gradient for a given input. We build an `iterate` function with `k_function()` for this:
```{r}
iterate <- k_function(list(model$input), list(loss, grads))
c(loss_value, grads_value) %<-%
    iterate(list(array(0, dim = c(1, 150, 150, 3))))
```

put it together into a loop
```{r}
input_img_data <-                                                     
  array(runif(150 * 150 * 3), dim = c(1, 150, 150, 3)) * 20 + 128    # input random image, near grey 
step <- 1
for (i in 1:40) {                                                     
  c(loss_value, grads_value) %<-% iterate(list(input_img_data)) # calculate gradient and loss
  cat("loss: ", loss_value, "\n")
  cat("grads_value: ", grads_value[1,1:5,1,1], "\n")
  input_img_data <- input_img_data + (grads_value * step) # update image     
}

```
This is gradient ascent: we add the gradient to the input, which increases the loss (here, the mean activation of the filter).
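
To make that concrete, here is a minimal toy sketch in plain R (not from the book): the same `x + step * gradient` update climbs toward a maximum, whereas subtracting the gradient would descend toward a minimum.

```{r}
# toy gradient ascent on f(x) = -(x - 3)^2, which is maximized at x = 3
f      <- function(x) -(x - 3)^2
grad_f <- function(x) -2 * (x - 3)  # analytic derivative

x <- 0
step <- 0.1
for (i in 1:40) {
  x <- x + step * grad_f(x)         # same "+" update as the filter loop above
}
x  # close to 3: adding the gradient increases f
```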

post-process the tensor so that we can display it as an image:

```{r}
deprocess_image <- function(x) {
  dms <- dim(x)
  x <- x - mean(x)                        
  x <- x / (sd(x) + 1e-5)                 
  x <- x * 0.1                            
  x <- x + 0.5                            
  x <- pmax(0, pmin(x, 1))                
  array(x, dim = dms)                     
}
```
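
A quick, purely illustrative sanity check: `deprocess_image()` should map an arbitrary tensor into [0, 1], centered near 0.5, which is what makes the result displayable as an image.

```{r}
x_test <- array(rnorm(150 * 150 * 3, mean = 128, sd = 40), dim = c(150, 150, 3))
range(deprocess_image(x_test))  # within [0, 1]
mean(deprocess_image(x_test))   # near 0.5
```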

put it all together in a function
```{r}
generate_pattern <- function(layer_name, filter_index, size = 150) {
  layer_output <- model$get_layer(layer_name)$output                      
  loss <- k_mean(layer_output[,,,filter_index])                           
  grads <- k_gradients(loss, model$input)[[1]]                            
  grads <- grads / (k_sqrt(k_mean(k_square(grads))) + 1e-5)               
  iterate <- k_function(list(model$input), list(loss, grads))             
  input_img_data <-                                                       
    array(runif(size * size * 3), dim = c(1, size, size, 3)) * 20 + 128   
  step <- 1                                                               
  for (i in 1:40) {                                                       
    c(loss_value, grads_value) %<-% iterate(list(input_img_data))         
    input_img_data <- input_img_data + (grads_value * step)               
  }                                                                       
  img <- input_img_data[1,,,]
  deprocess_image(img)
}
```

```{r}
library(grid)
grid.raster(generate_pattern("block3_conv1", 1))
```

```{r, eval=FALSE}
library(grid)
library(gridExtra)
dir.create("vgg_filters")
for (layer_name in c("block1_conv1", "block2_conv1",
                     "block3_conv1", "block4_conv1")) {
  size <- 140

  png(paste0("vgg_filters/", layer_name, ".png"),
      width = 8 * size, height = 8 * size)

  grobs <- list()
  for (i in 0:7) {
    for (j in 0:7) {
      pattern <- generate_pattern(layer_name, i + (j*8) + 1, size = size)
      grob <- rasterGrob(pattern,
                         width = unit(0.9, "npc"),
                         height = unit(0.9, "npc"))
      grobs[[length(grobs)+1]] <- grob
    }
  }

  grid.arrange(grobs = grobs, ncol = 8)
  dev.off()
}
```

![](vgg_filters/block1_conv1.png)
![](vgg_filters/block2_conv1.png)

![](vgg_filters/block3_conv1.png)

![](vgg_filters/block4_conv1.png)

## 5.4.3 heat maps

```{r}
model <- application_vgg16(weights = "imagenet")            
```

```{r}
img_path <- "creative_commons_elephant.jpg"              
img <- image_load(img_path, target_size = c(224, 224)) %>%           
  image_to_array() %>%                                               
  array_reshape(dim = c(1, 224, 224, 3)) %>%                         
  imagenet_preprocess_input()                                        
```

```{r}
preds <- model %>% predict(img)

imagenet_decode_predictions(preds, top = 3)[[1]]
```

```{r}
which.max(preds[1,])  # 1-based index of the top-scoring class; used below as model$output[, 387]
```
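
As a cross-check, decoding only the top prediction should name the class at the index we just found (in the 1-based indexing used by R, 387 corresponds to "African_elephant"):

```{r}
imagenet_decode_predictions(preds, top = 1)[[1]]
```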

```{r}
summary(model)
```


```{r}
african_elephant_output <- model$output[, 387]                  # the "African elephant" entry of the prediction vector
last_conv_layer <- model %>% get_layer("block5_conv3")          # last convolutional layer of VGG16
grads <- k_gradients(african_elephant_output, last_conv_layer$output)[[1]]  # gradient of the class score wrt the feature map
pooled_grads <- k_mean(grads, axis = c(1, 2, 3))                # mean gradient intensity per channel
iterate <- k_function(list(model$input),                        # maps an image to these two quantities
                      list(pooled_grads, last_conv_layer$output[1,,,]))
c(pooled_grads_value, conv_layer_output_value) %<-% iterate(list(img))
for (i in 1:512) {       # 512 channels
  conv_layer_output_value[,,i] <-
    conv_layer_output_value[,,i] * pooled_grads_value[[i]]      # weight each channel by its importance to the class
}
heatmap <- apply(conv_layer_output_value, c(1,2), mean)         # channel-wise mean gives the class-activation heatmap
```
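
The channel loop above could also be written without an explicit loop using base R's `sweep()`. A stylistic sketch only (not run here, since `conv_layer_output_value` has already been weighted in place); `conv_raw` is a hypothetical name for the unweighted activations returned by `iterate()`:

```{r, eval=FALSE}
# loop-free equivalent: multiply each channel (3rd margin) by its pooled gradient
conv_weighted <- sweep(conv_raw, 3, pooled_grads_value, `*`)
heatmap <- apply(conv_weighted, c(1, 2), mean)
```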



```{r}
heatmap <- pmax(heatmap, 0)
heatmap <- heatmap / max(heatmap)                                          
write_heatmap <- function(heatmap, filename, width = 224, height = 224,
                          bg = "white", col = terrain.colors(12)) {
  png(filename, width = width, height = height, bg = bg)
  op = par(mar = c(0,0,0,0))
  on.exit({par(op); dev.off()}, add = TRUE)
  rotate <- function(x) t(apply(x, 2, rev))
  image(rotate(heatmap), axes = FALSE, asp = 1, col = col)
}
write_heatmap(heatmap, "elephant_heatmap.png")                             
```

```{r}
library(magick)
library(viridis)
image <- image_read(img_path)                                      
info <- image_info(image)
geometry <- sprintf("%dx%d!", info$width, info$height)
pal <- col2rgb(viridis(20), alpha = TRUE)                          
alpha <- floor(seq(0, 255, length = ncol(pal)))
pal_col <- rgb(t(pal), alpha = alpha, maxColorValue = 255)
write_heatmap(heatmap, "elephant_overlay.png",
              width = 14, height = 14, bg = NA, col = pal_col)
image_read("elephant_overlay.png") %>%                             
  image_resize(geometry, filter = "quadratic") %>%
  image_composite(image, operator = "blend", compose_args = "20") %>%
  plot()
```

